import pandas as pd
import plotly.express as px
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
import plotly.graph_objects as go
from scipy.optimize import minimize
from tabulate import tabulate
from scipy.optimize import minimize, LinearConstraint
%matplotlib inline
def load_data(filename, sheet_index, frequency="daily"):
    """Load one price worksheet from an Excel workbook.

    Parameters
    ----------
    filename : str
        Path to the Excel workbook.
    sheet_index : int
        Zero-based index of the worksheet to read.
    frequency : str, optional
        Sampling-frequency label ("daily"/"monthly").
        NOTE(review): currently unused by the body; kept only so existing
        callers that pass it keep working.

    Returns
    -------
    pandas.DataFrame
        Prices indexed by date.
    """
    data = pd.read_excel(filename, sheet_name=sheet_index)
    # The first two rows hold sheet metadata, not price observations.
    data = data.drop([0, 1])
    # The "Name" column actually contains the observation dates;
    # use it as the index and relabel the axis accordingly.
    data = data.set_index("Name").rename_axis("Date")
    # read_excel already returns a DataFrame — the original re-wrapped it
    # in pd.DataFrame(...) redundantly; that no-op has been removed.
    return data
# Load daily prices (worksheet 0 of the exam workbook)
daily_prices = load_data("dataforexam2024.xlsx", sheet_index=0, frequency="daily")
# Load monthly prices (worksheet 6 of the same workbook)
monthly_prices = load_data("dataforexam2024.xlsx", sheet_index=6, frequency="monthly")
Focus first on the two worksheets on stocks. Compute stock returns for both daily and monthly observations. Compute the mean, standard deviation, variance, skewness and kurtosis of the stock returns at daily and monthly frequency. Show the results in a table and comment.
# returns function
def all_stocks_returns(prices):
    """Return the simple (period-over-period percentage) returns of every stock in `prices`."""
    return prices.pct_change()
#statistics function
def statistics(returns):
    """Summarise a returns DataFrame column by column.

    Returns a DataFrame with one row per asset and the columns
    Mean, Standard deviation, Variance, Skewness, Kurtosis.
    """
    summary = {
        'Mean': returns.mean(),
        'Standard deviation': returns.std(),
        'Variance': returns.var(),
        'Skewness': returns.skew(),
        'Kurtosis': returns.kurtosis(),
    }
    return pd.DataFrame(summary)
daily_returns = all_stocks_returns(prices=daily_prices)
daily_statistics = statistics(returns=daily_returns)
daily_statistics
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| LEONARDO | 0.000603 | 0.022982 | 0.000528 | -0.335558 | 11.914517 |
| ECOSUNTEK | 0.000666 | 0.031551 | 0.000995 | 2.485601 | 18.456549 |
| LANDI RENZO | 0.000108 | 0.030126 | 0.000908 | 1.048632 | 11.966995 |
| PIRELLI & C | 0.000077 | 0.021669 | 0.000470 | -0.038849 | 6.347566 |
| STELLANTIS | 0.000877 | 0.023909 | 0.000572 | -0.445755 | 5.612135 |
| ... | ... | ... | ... | ... | ... |
| BEGHELLI | 0.000063 | 0.023059 | 0.000532 | 1.954996 | 14.422354 |
| SOL | 0.000735 | 0.017404 | 0.000303 | 0.348680 | 1.804154 |
| DATALOGIC | 0.000136 | 0.024107 | 0.000581 | 0.305468 | 7.551701 |
| BIESSE | 0.000463 | 0.026874 | 0.000722 | -0.149879 | 5.944128 |
| SAFILO GROUP | -0.000374 | 0.028422 | 0.000808 | 0.455984 | 10.134915 |
88 rows × 5 columns
daily_statistics.describe()
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| count | 88.000000 | 88.000000 | 88.000000 | 88.000000 | 88.000000 |
| mean | 0.000287 | 0.022073 | 0.000522 | 0.522613 | 15.654105 |
| std | 0.000356 | 0.005940 | 0.000305 | 1.147053 | 22.302770 |
| min | -0.000657 | 0.013716 | 0.000188 | -1.266280 | 1.804154 |
| 25% | 0.000087 | 0.017382 | 0.000302 | -0.195129 | 6.134081 |
| 50% | 0.000314 | 0.020648 | 0.000426 | 0.253309 | 10.758447 |
| 75% | 0.000497 | 0.024746 | 0.000612 | 1.076885 | 15.654191 |
| max | 0.001260 | 0.044574 | 0.001987 | 5.260598 | 174.900003 |
monthly_returns = all_stocks_returns(prices=monthly_prices)
monthly_statistics = statistics(returns=monthly_returns)
monthly_statistics
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| LEONARDO | 0.012094 | 0.109496 | 0.011989 | 0.157740 | 2.783024 |
| ECOSUNTEK | 0.024217 | 0.290621 | 0.084460 | 7.752425 | 71.948842 |
| LANDI RENZO | 0.004255 | 0.166552 | 0.027740 | 3.562122 | 22.438194 |
| PIRELLI & C | -0.000314 | 0.092494 | 0.008555 | -0.447827 | 0.975721 |
| STELLANTIS | 0.020562 | 0.118008 | 0.013926 | -0.239491 | 0.973181 |
| ... | ... | ... | ... | ... | ... |
| BEGHELLI | 0.000510 | 0.091500 | 0.008372 | 1.397631 | 4.410079 |
| SOL | 0.015200 | 0.062259 | 0.003876 | 0.314720 | -0.375706 |
| DATALOGIC | 0.003332 | 0.109487 | 0.011987 | 0.281861 | -0.151402 |
| BIESSE | 0.011034 | 0.126958 | 0.016118 | -0.199362 | 0.192336 |
| SAFILO GROUP | -0.008420 | 0.136432 | 0.018614 | 1.014932 | 3.350994 |
88 rows × 5 columns
monthly_statistics.describe()
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| count | 88.000000 | 88.000000 | 88.000000 | 88.000000 | 88.000000 |
| mean | 0.005973 | 0.101401 | 0.011888 | 0.705635 | 5.208759 |
| std | 0.008738 | 0.040299 | 0.011487 | 1.509833 | 10.749839 |
| min | -0.018749 | 0.047367 | 0.002244 | -0.919117 | -0.442656 |
| 25% | 0.001284 | 0.072082 | 0.005196 | -0.171418 | 0.855552 |
| 50% | 0.006771 | 0.093278 | 0.008701 | 0.189155 | 2.086702 |
| 75% | 0.011545 | 0.116725 | 0.013625 | 1.083393 | 4.076555 |
| max | 0.027676 | 0.290621 | 0.084460 | 7.752425 | 71.948842 |
Compute the variance-covariance matrix and correlation matrix.
# variance-covariance matrix
covar_matrix_all = daily_returns.cov()
covar_matrix_all
| LEONARDO | ECOSUNTEK | LANDI RENZO | PIRELLI & C | STELLANTIS | PININFARINA | FRENI BREMBO | INTESA SANPAOLO | ILLIMITY BANK | UNICREDIT | ... | GABETTI PROPERTY SLTN. | MFE B | ERG | CEMBRE | SABAF | BEGHELLI | SOL | DATALOGIC | BIESSE | SAFILO GROUP | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| LEONARDO | 0.000528 | 0.000067 | 0.000161 | 0.000192 | 0.000255 | 0.000113 | 0.000158 | 0.000245 | 0.000179 | 0.000288 | ... | 0.000159 | 0.000186 | 0.000122 | 0.000075 | 0.000071 | 0.000085 | 0.000068 | 0.000155 | 0.000203 | 0.000150 |
| ECOSUNTEK | 0.000067 | 0.000995 | 0.000062 | 0.000064 | 0.000083 | 0.000054 | 0.000066 | 0.000066 | 0.000079 | 0.000077 | ... | 0.000026 | 0.000054 | 0.000021 | 0.000043 | 0.000028 | 0.000049 | 0.000048 | 0.000062 | 0.000083 | 0.000071 |
| LANDI RENZO | 0.000161 | 0.000062 | 0.000908 | 0.000176 | 0.000170 | 0.000096 | 0.000119 | 0.000169 | 0.000158 | 0.000209 | ... | 0.000126 | 0.000112 | 0.000109 | 0.000065 | 0.000089 | 0.000085 | 0.000064 | 0.000128 | 0.000173 | 0.000105 |
| PIRELLI & C | 0.000192 | 0.000064 | 0.000176 | 0.000470 | 0.000265 | 0.000104 | 0.000233 | 0.000235 | 0.000200 | 0.000274 | ... | 0.000149 | 0.000186 | 0.000103 | 0.000089 | 0.000079 | 0.000102 | 0.000088 | 0.000207 | 0.000252 | 0.000153 |
| STELLANTIS | 0.000255 | 0.000083 | 0.000170 | 0.000265 | 0.000572 | 0.000158 | 0.000225 | 0.000300 | 0.000229 | 0.000344 | ... | 0.000184 | 0.000237 | 0.000136 | 0.000104 | 0.000098 | 0.000118 | 0.000099 | 0.000221 | 0.000296 | 0.000197 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| BEGHELLI | 0.000085 | 0.000049 | 0.000085 | 0.000102 | 0.000118 | 0.000120 | 0.000090 | 0.000128 | 0.000084 | 0.000142 | ... | 0.000125 | 0.000124 | 0.000063 | 0.000058 | 0.000048 | 0.000532 | 0.000061 | 0.000085 | 0.000130 | 0.000108 |
| SOL | 0.000068 | 0.000048 | 0.000064 | 0.000088 | 0.000099 | 0.000066 | 0.000079 | 0.000089 | 0.000087 | 0.000095 | ... | 0.000076 | 0.000089 | 0.000065 | 0.000059 | 0.000046 | 0.000061 | 0.000303 | 0.000086 | 0.000115 | 0.000090 |
| DATALOGIC | 0.000155 | 0.000062 | 0.000128 | 0.000207 | 0.000221 | 0.000107 | 0.000168 | 0.000184 | 0.000180 | 0.000207 | ... | 0.000095 | 0.000175 | 0.000109 | 0.000097 | 0.000086 | 0.000085 | 0.000086 | 0.000581 | 0.000256 | 0.000150 |
| BIESSE | 0.000203 | 0.000083 | 0.000173 | 0.000252 | 0.000296 | 0.000157 | 0.000224 | 0.000256 | 0.000229 | 0.000291 | ... | 0.000179 | 0.000205 | 0.000133 | 0.000113 | 0.000117 | 0.000130 | 0.000115 | 0.000256 | 0.000722 | 0.000232 |
| SAFILO GROUP | 0.000150 | 0.000071 | 0.000105 | 0.000153 | 0.000197 | 0.000110 | 0.000135 | 0.000192 | 0.000177 | 0.000213 | ... | 0.000158 | 0.000165 | 0.000111 | 0.000083 | 0.000084 | 0.000108 | 0.000090 | 0.000150 | 0.000232 | 0.000808 |
88 rows × 88 columns
# correlation matrix
corr_matrix_all = daily_returns.corr()
corr_matrix_all
| LEONARDO | ECOSUNTEK | LANDI RENZO | PIRELLI & C | STELLANTIS | PININFARINA | FRENI BREMBO | INTESA SANPAOLO | ILLIMITY BANK | UNICREDIT | ... | GABETTI PROPERTY SLTN. | MFE B | ERG | CEMBRE | SABAF | BEGHELLI | SOL | DATALOGIC | BIESSE | SAFILO GROUP | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| LEONARDO | 1.000000 | 0.092882 | 0.232629 | 0.372725 | 0.463566 | 0.142557 | 0.359222 | 0.513007 | 0.391636 | 0.468865 | ... | 0.246897 | 0.340568 | 0.307900 | 0.185737 | 0.167153 | 0.159721 | 0.170430 | 0.280643 | 0.327978 | 0.230270 |
| ECOSUNTEK | 0.092882 | 1.000000 | 0.065169 | 0.097673 | 0.109490 | 0.049604 | 0.110209 | 0.100559 | 0.137828 | 0.090867 | ... | 0.029132 | 0.071995 | 0.038602 | 0.077729 | 0.047523 | 0.067559 | 0.086775 | 0.082168 | 0.098208 | 0.078707 |
| LANDI RENZO | 0.232629 | 0.065169 | 1.000000 | 0.291108 | 0.236049 | 0.091943 | 0.206771 | 0.269919 | 0.287373 | 0.259252 | ... | 0.149148 | 0.157491 | 0.209780 | 0.122290 | 0.160191 | 0.122081 | 0.122059 | 0.176086 | 0.213543 | 0.122978 |
| PIRELLI & C | 0.372725 | 0.097673 | 0.291108 | 1.000000 | 0.535858 | 0.208203 | 0.546082 | 0.566748 | 0.462294 | 0.517339 | ... | 0.238672 | 0.395684 | 0.261908 | 0.218854 | 0.184045 | 0.190652 | 0.242677 | 0.367067 | 0.416366 | 0.230158 |
| STELLANTIS | 0.463566 | 0.109490 | 0.236049 | 0.535858 | 1.000000 | 0.191061 | 0.493128 | 0.604563 | 0.529212 | 0.538014 | ... | 0.275165 | 0.418674 | 0.330045 | 0.245744 | 0.221531 | 0.213826 | 0.238896 | 0.382700 | 0.460425 | 0.290315 |
| ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... | ... |
| BEGHELLI | 0.159721 | 0.067559 | 0.122081 | 0.190652 | 0.213826 | 0.151441 | 0.204087 | 0.267523 | 0.172545 | 0.231029 | ... | 0.194214 | 0.227097 | 0.158379 | 0.142595 | 0.112974 | 1.000000 | 0.153222 | 0.152894 | 0.210112 | 0.164170 |
| SOL | 0.170430 | 0.086775 | 0.122059 | 0.242677 | 0.238896 | 0.109727 | 0.237108 | 0.247258 | 0.286900 | 0.205276 | ... | 0.156871 | 0.215686 | 0.218261 | 0.191077 | 0.141975 | 0.153222 | 1.000000 | 0.205757 | 0.245483 | 0.181831 |
| DATALOGIC | 0.280643 | 0.082168 | 0.176086 | 0.367067 | 0.382700 | 0.128963 | 0.364251 | 0.367646 | 0.359158 | 0.321572 | ... | 0.140437 | 0.305559 | 0.261915 | 0.227364 | 0.193448 | 0.152894 | 0.205757 | 1.000000 | 0.394593 | 0.218535 |
| BIESSE | 0.327978 | 0.098208 | 0.213543 | 0.416366 | 0.460425 | 0.169953 | 0.436465 | 0.458983 | 0.424897 | 0.405855 | ... | 0.237558 | 0.321200 | 0.288462 | 0.237586 | 0.235360 | 0.210112 | 0.245483 | 0.394593 | 1.000000 | 0.303253 |
| SAFILO GROUP | 0.230270 | 0.078707 | 0.122978 | 0.230158 | 0.290315 | 0.112739 | 0.249259 | 0.324955 | 0.295702 | 0.281141 | ... | 0.198799 | 0.244614 | 0.226359 | 0.164465 | 0.160612 | 0.164170 | 0.181831 | 0.218535 | 0.303253 | 1.000000 |
88 rows × 88 columns
Select a sample of 10-12 securities. You should motivate your choice of securities. The choice can be made, for example, on the basis of the correlation structure emerging from the variance-covariance matrix. Explain and justify your choices.
def chosen_stocks(prices):
    """Extract the 12 selected securities from `prices`, relabelled with short names."""
    # short label -> column name in the source worksheet
    selection = {
        'Stellantis': "STELLANTIS",
        'Fineco': "FINECOBANK SPA",
        'Alerion clean power': "ALERION CLEAN POWER",
        'El en': "EL EN",
        'Amplifon': "AMPLIFON",
        'Interpump group': "INTERPUMP GROUP",
        'Buzzi': "BUZZI",
        'Danieli': "DANIELI",
        'Italmobiliare': "ITALMOBILIARE",
        'Sol': "SOL",
        'Terna rete elettrica naz': "TERNA RETE ELETTRICA NAZ",
        'Exprivia': "EXPRIVIA",
    }
    return pd.DataFrame({label: prices[column] for label, column in selection.items()})
Chosen_stock_daily = chosen_stocks(prices=daily_prices)
Chosen_stock_monthly = chosen_stocks(prices=monthly_prices)
We employed two approaches for stock selection: a quantitative approach focusing on key metrics such as expected return, standard deviation, and the correlation matrix, and a qualitative approach considering each company's business type, industry sector, and fundamental financial data from the balance sheet. The objective was to compile a list of stocks that is highly diversified both qualitatively and quantitatively.
Plot the behavior of the prices of the securities you have chosen, both with daily and monthly frequency, during the entire sample period.
# Daily plot of the chosen stocks' prices.
fig = px.line(Chosen_stock_daily)
fig.update_layout(
    # The original left a "# add title" TODO without a title — added here.
    title='Daily prices of the selected stocks',
    xaxis_title='Date',
    yaxis_title='Prices',
    legend_title='Legend',
    font=dict(family="Arial", size=12, color="Black"),
    template="plotly_white",
    height=600,
    width=1000
)
fig.show()
# Monthly plot of the chosen stocks' prices.
fig = px.line(Chosen_stock_monthly)
fig.update_layout(
    # Title added for consistency with the daily chart.
    title='Monthly prices of the selected stocks',
    xaxis_title='Date',
    yaxis_title='Prices',
    legend_title='Legend',
    font=dict(family="Arial", size=12, color="Black"),
    template="plotly_white",
    height=600,
    width=1000
)
fig.show()
def optimalmeanvariance(returns, frequency, num_portfolios=100000):
    """Monte-Carlo search for the maximum-Sharpe portfolio (short sales allowed).

    Parameters
    ----------
    returns : pandas.DataFrame
        Per-period simple returns, one column per asset.
    frequency : int
        Periods per year used to annualise (252 for daily, 12 for monthly).
    num_portfolios : int, optional
        Number of random weight vectors to simulate (default 100000,
        matching the original behaviour).

    Returns
    -------
    tuple of pandas.DataFrame
        (weights of the max-Sharpe portfolio indexed by asset,
         one-row frame with its annualised return and volatility).
    """
    np.random.seed(100)
    n_assets = len(returns.columns)
    # PERFORMANCE FIX: the original recomputed returns.mean() and
    # returns.cov() inside the 100 000-iteration loop; both are loop
    # invariants, so hoist the annualised versions out (same values).
    ann_mean = ((returns.mean() + 1) ** frequency) - 1
    ann_cov = returns.cov() * frequency

    all_weights = np.zeros((num_portfolios, n_assets))
    ret_arr = np.zeros(num_portfolios)
    vol_arr = np.zeros(num_portfolios)
    sharpe_arr = np.zeros(num_portfolios)
    for x in range(num_portfolios):
        # Draw raw weights in [-1, 1) (shorting allowed) and scale to sum 1.
        # NOTE(review): if the raw weights sum to ~0 this normalisation can
        # yield extreme leverage; kept as in the original specification.
        weights = 2 * np.random.random(n_assets) - 1
        weights = weights / np.sum(weights)
        all_weights[x, :] = weights
        ret_arr[x] = np.sum(ann_mean * weights)
        vol_arr[x] = np.sqrt(np.dot(weights.T, np.dot(ann_cov, weights)))
        sharpe_arr[x] = ret_arr[x] / vol_arr[x]  # Sharpe ratio with rf = 0

    reddot = sharpe_arr.argmax()
    df = pd.DataFrame({'Ret_max_sr': round(ret_arr[reddot], 2),
                       'Vol_max_vol': round(vol_arr[reddot], 2)}, index=['0'])
    weights_optimal_portfolio = all_weights[reddot, :]
    portfolio_weights = {asset: round(weight, 2)
                         for asset, weight in zip(list(returns), weights_optimal_portfolio)}
    # Optimal portfolio weights, one row per asset.
    df_weights = pd.DataFrame(portfolio_weights, index=['WeightMV']).T
    return df_weights, df
Chosen_daily_returns = Chosen_stock_daily.pct_change()
daily_meanvariance = optimalmeanvariance(returns = Chosen_daily_returns, frequency=252)
daily_meanvariance[0]
| WeightMV | |
|---|---|
| Stellantis | 0.12 |
| Fineco | 0.05 |
| Alerion clean power | 0.22 |
| El en | 0.22 |
| Amplifon | 0.18 |
| Interpump group | 0.04 |
| Buzzi | -0.02 |
| Danieli | -0.17 |
| Italmobiliare | 0.10 |
| Sol | 0.19 |
| Terna rete elettrica naz | -0.03 |
| Exprivia | 0.11 |
daily_meanvariance[1]
| Ret_max_sr | Vol_max_vol | |
|---|---|---|
| 0 | 0.3 | 0.22 |
# monthly returns of the chosen stocks
Chosen_monthly_returns = Chosen_stock_monthly.pct_change()
monthly_meanvariance = optimalmeanvariance(returns = Chosen_monthly_returns, frequency=12)
monthly_meanvariance[0]
| WeightMV | |
|---|---|
| Stellantis | 0.29 |
| Fineco | -0.05 |
| Alerion clean power | 0.27 |
| El en | 0.12 |
| Amplifon | 0.25 |
| Interpump group | -0.09 |
| Buzzi | 0.01 |
| Danieli | -0.22 |
| Italmobiliare | -0.05 |
| Sol | 0.24 |
| Terna rete elettrica naz | 0.20 |
| Exprivia | 0.04 |
monthly_meanvariance[1]
| Ret_max_sr | Vol_max_vol | |
|---|---|---|
| 0 | 0.32 | 0.23 |
# Long-only variant of the mean-variance simulation (no short sales).
def optimalmeanvariance_nonnegativeconstraint(returns, frequency, num_portfolios=100000):
    """Monte-Carlo search for the maximum-Sharpe portfolio with non-negative weights.

    Parameters
    ----------
    returns : pandas.DataFrame
        Per-period simple returns, one column per asset.
    frequency : int
        Periods per year used to annualise (252 for daily, 12 for monthly).
    num_portfolios : int, optional
        Number of random weight vectors to simulate (default 100000,
        matching the original behaviour).

    Returns
    -------
    tuple of pandas.DataFrame
        (weights of the max-Sharpe portfolio indexed by asset,
         one-row frame with its annualised return and volatility).
    """
    np.random.seed(100)
    n_assets = len(returns.columns)
    # PERFORMANCE FIX: hoist the loop-invariant annualised mean and
    # covariance out of the 100 000-iteration loop (identical values).
    ann_mean = ((returns.mean() + 1) ** frequency) - 1
    ann_cov = returns.cov() * frequency

    all_weights = np.zeros((num_portfolios, n_assets))
    ret_arr = np.zeros(num_portfolios)
    vol_arr = np.zeros(num_portfolios)
    sharpe_arr = np.zeros(num_portfolios)
    for x in range(num_portfolios):
        # Non-negative random weights normalised to sum to 1.
        weights = np.random.random(n_assets)
        weights = weights / np.sum(weights)
        all_weights[x, :] = weights
        ret_arr[x] = np.sum(ann_mean * weights)
        vol_arr[x] = np.sqrt(np.dot(weights.T, np.dot(ann_cov, weights)))
        sharpe_arr[x] = ret_arr[x] / vol_arr[x]  # Sharpe ratio with rf = 0

    reddot = sharpe_arr.argmax()
    df = pd.DataFrame({'Ret_max_sr': round(ret_arr[reddot], 2),
                       'Vol_max_vol': round(vol_arr[reddot], 2)}, index=['0'])
    weights_optimal_portfolio = all_weights[reddot, :]
    portfolio_weights = {asset: round(weight, 2)
                         for asset, weight in zip(list(returns), weights_optimal_portfolio)}
    df_weights = pd.DataFrame(portfolio_weights, index=['WeightMV']).T
    return df_weights, df
daily_meanvariance_nn = optimalmeanvariance_nonnegativeconstraint(returns = Chosen_daily_returns, frequency=252)
daily_meanvariance_nn[0]
| WeightMV | |
|---|---|
| Stellantis | 0.04 |
| Fineco | 0.07 |
| Alerion clean power | 0.20 |
| El en | 0.10 |
| Amplifon | 0.20 |
| Interpump group | 0.05 |
| Buzzi | 0.02 |
| Danieli | 0.03 |
| Italmobiliare | 0.16 |
| Sol | 0.11 |
| Terna rete elettrica naz | 0.00 |
| Exprivia | 0.03 |
daily_meanvariance_nn[1]
| Ret_max_sr | Vol_max_vol | |
|---|---|---|
| 0 | 0.25 | 0.19 |
monthly_meanvariance_nn = optimalmeanvariance_nonnegativeconstraint(returns = Chosen_monthly_returns, frequency=12)
monthly_meanvariance_nn[0]
| WeightMV | |
|---|---|
| Stellantis | 0.06 |
| Fineco | 0.01 |
| Alerion clean power | 0.19 |
| El en | 0.03 |
| Amplifon | 0.20 |
| Interpump group | 0.07 |
| Buzzi | 0.03 |
| Danieli | 0.02 |
| Italmobiliare | 0.14 |
| Sol | 0.18 |
| Terna rete elettrica naz | 0.04 |
| Exprivia | 0.04 |
monthly_meanvariance_nn[1]
| Ret_max_sr | Vol_max_vol | |
|---|---|---|
| 0 | 0.25 | 0.19 |
Chosen_daily_returns = Chosen_stock_daily.pct_change().dropna()
def portfolio(returns, weights):
    """Per-period portfolio returns as the row-wise weighted sum of asset returns.

    `returns` has one column per asset; `weights` is a one-column DataFrame
    indexed by asset (as produced by the optimiser functions above).
    """
    weight_row = weights.T.values          # shape (1, n_assets): broadcasts over rows
    contributions = returns * weight_row   # per-asset weighted return series
    return contributions.sum(axis=1)
daily_constrained = portfolio(returns = Chosen_daily_returns, weights=daily_meanvariance_nn[0])
daily_nonconstrained = portfolio(returns = Chosen_daily_returns, weights=daily_meanvariance[0])
monthly_constrained = portfolio(returns = Chosen_monthly_returns, weights=monthly_meanvariance_nn[0])
monthly_nonconstrained = portfolio(returns = Chosen_monthly_returns, weights=monthly_meanvariance[0])
daily_equally_weighted = Chosen_daily_returns.mean(axis=1)
monthly_equally_weighted = Chosen_monthly_returns.mean(axis=1)
daily_df_portfolio = pd.DataFrame({'daily_constrained': daily_constrained,
'daily_nonconstrained':daily_nonconstrained,
'daily_equally_weighted':daily_equally_weighted})
monthly_df_portfolio = pd.DataFrame({'monthly_constrained':monthly_constrained,
'monthly_nonconstrained': monthly_nonconstrained,
'monthly_equally_weighted': monthly_equally_weighted})
daily_stats = statistics(returns=daily_df_portfolio)
daily_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| daily_constrained | 0.000892 | 0.012320 | 0.000152 | -0.445173 | 7.396169 |
| daily_nonconstrained | 0.001041 | 0.014146 | 0.000200 | -0.250771 | 5.299539 |
| daily_equally_weighted | 0.000755 | 0.011959 | 0.000143 | -0.645239 | 7.489164 |
monthly_stats = statistics(returns=monthly_df_portfolio)
monthly_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| monthly_constrained | 0.018611 | 0.054657 | 0.002987 | 0.017367 | 0.327975 |
| monthly_nonconstrained | 0.023061 | 0.065891 | 0.004342 | 0.012150 | 0.542810 |
| monthly_equally_weighted | 0.016458 | 0.057912 | 0.003354 | -0.224533 | 0.020129 |
def compare_cum_return(df):
    """Render the cumulative-return series in `df` as a white-themed line chart."""
    layout = dict(
        xaxis_title='Date',
        yaxis_title='Cumulative Return',
        legend_title='Legend',
        font=dict(family="Arial", size=12, color="Black"),
        template="plotly_white",
    )
    figure = px.line(df)
    figure.update_layout(**layout)
    # fig.show() returns None, so so does this function.
    return figure.show()
daily_compare = compare_cum_return(df=daily_df_portfolio.cumsum())
monthly_compare = compare_cum_return(df=monthly_df_portfolio.cumsum())
def annualized_return_cov(returns, frequency):
    """Annualised mean returns (geometric compounding) and annualised covariance matrix."""
    annual_mean = (1 + returns.mean()) ** frequency - 1
    annual_cov = frequency * returns.cov()
    return annual_mean, annual_cov
def portfolio_volatility(x, cov_matrix):
    """Portfolio standard deviation sqrt(x' Sigma x) for weight vector `x`."""
    variance = x @ cov_matrix @ x
    return np.sqrt(variance)
def efficient_frontier(returns, cov_matrix, xmin, xmax):
    """Trace and plot the efficient frontier; return (None, GMVP weights).

    Here `returns` is a vector of annualised expected returns (one entry per
    asset), NOT a time series; `cov_matrix` is the annualised covariance
    matrix. `xmin`/`xmax` only set the x-axis range of the plot.
    """
    # Equal-weight starting point for the optimiser.
    x0 = np.ones(len(returns)) / len(returns)
    # Grid of 40 target returns spanning the asset-return range.
    expected_returns = np.linspace(returns.min(), returns.max(), 40)
    optimal_allocations = []
    volatilities = []
    for target_return in expected_returns:
        # Constraints: weights sum to 1, and the portfolio return hits the
        # target. The lambda captures target_return but is consumed within
        # the same iteration, so late binding is not an issue here.
        linear_constraints = [LinearConstraint(np.ones(len(returns)), 1, 1), {'type': 'eq', 'fun': lambda x: x @ returns - target_return}]
        result = minimize(portfolio_volatility, x0, args=(cov_matrix,), constraints=linear_constraints)
        optimal_allocations.append(result.x)
        volatilities.append(result.fun)
    # Global minimum-variance portfolio = lowest achieved volatility.
    min_variance_index = np.argmin(volatilities)
    min_variance_volatility = volatilities[min_variance_index]  # NOTE: unused below
    min_variance_return = expected_returns[min_variance_index]  # NOTE: unused below
    gmvp_allocation = optimal_allocations[min_variance_index]
    fig = go.Figure()
    # Plot only the upper (efficient) branch, from the GMVP upwards.
    fig.add_trace(go.Scatter(
        x=volatilities[min_variance_index:],
        y=expected_returns[min_variance_index:],
        mode='lines'
    ))
    fig.update_layout(
        template='plotly_white',
        xaxis=dict(title='Volatility', range=[xmin, xmax]),
        yaxis=dict(title='Return'),
        width=800,
        height=500,
    )
    # fig.show() evaluates to None; callers use the second tuple element.
    return fig.show(), gmvp_allocation
exp_ret_d = annualized_return_cov(returns=Chosen_daily_returns, frequency=252)
exp_ret_d = exp_ret_d[0]
cov_matrix_d = annualized_return_cov(returns=Chosen_daily_returns, frequency=252)
cov_matrix_d = cov_matrix_d[1]
daily_EF = efficient_frontier(returns= exp_ret_d, cov_matrix=cov_matrix_d, xmin= 0.15,xmax=0.28)
exp_ret_m = annualized_return_cov(returns=Chosen_monthly_returns, frequency=12)
exp_ret_m = exp_ret_m[0]
cov_matrix_m = annualized_return_cov(returns=Chosen_monthly_returns, frequency=12)
cov_matrix_m = cov_matrix_m[1]
monthly_EF = efficient_frontier(returns= exp_ret_m, cov_matrix=cov_matrix_m, xmin= 0.12,xmax=0.28)
daily_index = load_data("addendumdata2024.xlsx", sheet_index=0, frequency="daily")
daily_index = pd.DataFrame(daily_index["FTSE ITALIA ALL SHARE - TOT RETURN IND"])
daily_index_returns = daily_index.pct_change()
daily_index_returns["Portfolio"] = daily_constrained
daily_index_stats = statistics(returns = daily_index_returns)
daily_index_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| FTSE ITALIA ALL SHARE - TOT RETURN IND | 0.000428 | 0.013508 | 0.000182 | -1.332452 | 15.580739 |
| Portfolio | 0.000892 | 0.012320 | 0.000152 | -0.445173 | 7.396169 |
#add the portfolio data for the graph
daily_index_returns["Portfolio"] = daily_constrained
compare_cum_return(df=daily_index_returns.cumsum())
monthly_index = load_data("addendumdata2024.xlsx", sheet_index=1, frequency="monthly")
monthly_index = pd.DataFrame(monthly_index["FTSE ITALIA ALL SHARE - TOT RETURN IND"])
monthly_index_returns = monthly_index.pct_change()
#add the portfolio data for the graph and the statistics (constrained)
monthly_index_returns["Portfolio"] = monthly_constrained
monthly_index_stats = statistics(returns = monthly_index_returns)
monthly_index_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| FTSE ITALIA ALL SHARE - TOT RETURN IND | 0.009010 | 0.057128 | 0.003264 | -0.554310 | 3.077909 |
| Portfolio | 0.018611 | 0.054657 | 0.002987 | 0.017367 | 0.327975 |
compare_cum_return(df=monthly_index_returns.cumsum())
daily_index_returns=daily_index_returns.drop(["Portfolio"], axis=1)
monthly_index_returns=monthly_index_returns.drop(["Portfolio"], axis=1)
# Beta of each chosen asset: OLS slope of the asset's daily returns on the
# market-index returns (np.polyfit degree 1 -> slope b, intercept a).
x = daily_index_returns.dropna().values
asset_list = list(Chosen_daily_returns.columns)
beta = []
for i in asset_list:
    x = daily_index_returns.dropna().values  # recomputed each pass; same value every time
    asset = Chosen_daily_returns[i].dropna().values
    # NOTE(review): .dropna() is applied independently to the index series and
    # to each asset series — if their NaN dates differ the two vectors
    # misalign (or differ in length); verify against the data.
    b, a = np.polyfit(x.flatten(), asset.flatten(), 1)
    beta.append(b)
# np.matrix is deprecated in NumPy; a plain array would serve here.
beta_asset = pd.DataFrame(np.matrix(beta), columns=asset_list)
# Beta of the portfolio (constrained daily portfolio returns vs the index).
portoflio = daily_df_portfolio["daily_constrained"].dropna().values
b, a = np.polyfit(x.flatten(), portoflio.flatten(), 1)
beta_asset["Portfolio"] = b
beta_asset
| Stellantis | Fineco | Alerion clean power | El en | Amplifon | Interpump group | Buzzi | Danieli | Italmobiliare | Sol | Terna rete elettrica naz | Exprivia | Portfolio | |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
| 0 | 1.298044 | 1.051708 | 0.441899 | 0.789416 | 0.648115 | 0.843775 | 0.986129 | 0.74928 | 0.500786 | 0.425048 | 0.61998 | 0.667292 | 0.653775 |
def annualized_ret(returns, frequency):
    """Annualise per-period mean returns by geometric compounding."""
    per_period_mean = returns.mean()
    return (1 + per_period_mean) ** frequency - 1
def SML(rf, Rm, B):
    """Security Market Line: CAPM expected return rf + B * (Rm - rf)."""
    market_premium = Rm - rf
    return rf + B * market_premium
Rm = annualized_ret(returns= daily_index_returns, frequency=252)
SML_stellantis = SML(B = beta_asset["Stellantis"].values, Rm=Rm, rf=.03)
Ret_stellantis = annualized_ret(returns=Chosen_daily_returns["Stellantis"], frequency=252)
SML_portfolio = SML(B = beta_asset["Portfolio"].values, Rm=Rm, rf=.03)
Ret_portfolio = annualized_ret(returns=daily_constrained, frequency=252)
SML_buzzi = SML(B = beta_asset["Buzzi"].values, Rm=Rm, rf=.03)
Ret_buzzi = annualized_ret(returns=Chosen_daily_returns["Buzzi"], frequency=252)
# Collect the betas and their SML (CAPM) returns for the summary table/plot.
beta_SML = pd.DataFrame({'Beta Portfolio': beta_asset["Portfolio"],
                         'Beta Stellantis': beta_asset["Stellantis"],
                         'Beta Buzzi': beta_asset["Buzzi"],
                         'Beta rf': 0,
                         })
# NOTE: this assignment shadows the SML() function defined above.
SML = pd.DataFrame({'SML_portfolio': SML_portfolio,
                    'SML_stellantis': SML_stellantis,
                    'SML_buzzi': SML_buzzi,
                    'SML_rf': 0.03,
                    })
# BUG FIX: the row labels must follow the column order of beta_SML/SML
# (Portfolio, Stellantis, Buzzi, rf). The original listed Stellantis first,
# so the Stellantis and Portfolio rows were swapped in the resulting table.
index = ["Portfolio", "Stellantis", "Buzzi", "Risk Free"]
beta_values = beta_SML.values.flatten()
returns_values = SML.values.flatten()
df = pd.DataFrame({'Beta': beta_values, 'Returns_SML': returns_values}, index=index)
df
| Beta | Returns_SML | |
|---|---|---|
| Stellantis | 0.653775 | 0.084792 |
| Portfolio | 1.298044 | 0.138787 |
| Buzzi | 0.986129 | 0.112646 |
| Risk Free | 0.000000 | 0.030000 |
fig = px.line(df, x=df["Beta"], y=df["Returns_SML"],
title='Security Market Line',
line_shape='linear', template='plotly_white')
scatter_trace = go.Scatter(x=beta_asset["Stellantis"], y=Ret_stellantis.flatten(), mode="markers", name="Stellantis")
fig.add_trace(scatter_trace)
scatter_trace = go.Scatter(x=beta_asset["Portfolio"], y=Ret_portfolio.flatten(), mode="markers", name="Portfolio")
fig.add_trace(scatter_trace)
scatter_trace = go.Scatter(x=beta_asset["Buzzi"], y=Ret_buzzi.flatten(), mode="markers", name="Buzzi")
fig.add_trace(scatter_trace)
fig.show()
# beta = same (betas estimated on daily data are reused for the monthly SML)
# NOTE(review): Rm below is computed from *daily* index returns but annualised
# with frequency=12 — it looks like it should use monthly_index_returns; confirm.
Rm = annualized_ret(returns= daily_index_returns, frequency=12)
Ret_stellantis = annualized_ret(returns=Chosen_monthly_returns["Stellantis"], frequency=12)
Ret_portfolio = annualized_ret(returns=monthly_constrained, frequency=12)
Ret_buzzi = annualized_ret(returns=Chosen_monthly_returns["Buzzi"], frequency=12)
# NOTE(review): the SML_* values below were computed in the daily section and
# are not recomputed with the monthly Rm — verify this is intentional.
SML = pd.DataFrame({'SML_portfolio': SML_portfolio,
                    'SML_stellantis': SML_stellantis,
                    'SML_buzzi': SML_buzzi,
                    'SML_rf': 0.03,
                    })
# BUG FIX: row labels must follow the beta_SML/SML column order
# (Portfolio, Stellantis, Buzzi, rf); the original listed Stellantis first,
# mislabelling the Stellantis and Portfolio rows.
index = ["Portfolio", "Stellantis", "Buzzi", "Risk Free"]
beta_values = beta_SML.values.flatten()
returns_values = SML.values.flatten()
df = pd.DataFrame({'Beta': beta_values, 'Returns_SML': returns_values}, index=index)
fig = px.line(df, x=df["Beta"], y=df["Returns_SML"],
title='Security Market Line Monthly',
line_shape='linear', template='plotly_white')
scatter_trace = go.Scatter(x=beta_asset["Stellantis"], y=Ret_stellantis.flatten(), mode="markers", name="Stellantis")
fig.add_trace(scatter_trace)
scatter_trace = go.Scatter(x=beta_asset["Portfolio"], y=Ret_portfolio.flatten(), mode="markers", name="Portfolio")
fig.add_trace(scatter_trace)
scatter_trace = go.Scatter(x=beta_asset["Buzzi"], y=Ret_buzzi.flatten(), mode="markers", name="Buzzi")
fig.add_trace(scatter_trace)
fig.show()
# market cap for the market neutral weights
Stellantis = 61.943
Fineco = 8.379
Alerion_clean_power = 1.38
El_en = 0.7449
amplifon = 6.70
interpump_group = 4.99
buzzi = 5.72
danieli = 2.21
italmobiliare = 1.17
sol = 2.28
terna_rete_elettrica = 15.57
exprivia = 0.0855
company = ["Stellantis", "Fineco", "Alerion Clean Power", "El_en", "Amplifon", "Interpump Group", "Buzzi", "Danieli", "Italmobiliare", "Sol", "Terna Rete Elettrica", "Exprivia"]
marketcap = [61.943, 8.379, 1.38, 0.7449, 6.70, 4.99, 5.72, 2.21, 1.17, 2.28, 15.57, 0.0855]
tot = sum(marketcap)
weights = [(marketcap / tot) for marketcap in marketcap]
# Risk Aversion parameter
# delta = (E[R_mkt] - rf) / Var(R_mkt), computed under the market-cap weights.
initial_weights = np.array(weights)
# Annualised market return and variance from daily data (252 periods/year).
ret = np.sum((((Chosen_daily_returns.mean()+1)**252)-1) * initial_weights)
var = np.dot(initial_weights.T, np.dot((Chosen_daily_returns.cov()*252), initial_weights))
risk_free = 0.03
delta_daily = (ret - risk_free)/var
ret = np.sum((((Chosen_monthly_returns.mean()+1)**12)-1) * initial_weights) # annualised from monthly data
var = np.dot(initial_weights.T, np.dot((Chosen_monthly_returns.cov()*12), initial_weights))
risk_free = 0.03
delta_monthly = (ret - risk_free)/var
def Black_Litterman(y, delta, risk_free):
    """Black-Litterman posterior returns and covariance for the 12 chosen stocks.

    Parameters
    ----------
    y : pandas.DataFrame
        Per-period returns of the 12 selected assets.
    delta : float
        Risk-aversion coefficient used to back out equilibrium returns.
    risk_free : float
        Annual risk-free rate.

    Returns
    -------
    tuple
        (posterior returns, posterior covariance, equilibrium returns).

    Notes
    -----
    Relies on the module-level `initial_weights` (market-cap weights) for
    the equilibrium reverse-optimisation step.
    """
    # Sample covariance of the chosen assets.
    cov_chosen = y.cov()
    # BUG FIX: the original multiplied by the global `delta_daily` regardless
    # of the `delta` argument, so the monthly call silently reused the daily
    # risk-aversion parameter. Use the parameter instead.
    Cov_risk = cov_chosen * delta
    # Equilibrium (implied) returns: rf + delta * Sigma * w_mkt.
    I = np.ones(12)
    equilibrium_returns = risk_free * I + np.dot(Cov_risk, initial_weights.T)
    # Prior-uncertainty scaling tau = 1/(years of data).
    tao = 1/8
    prior_variance = tao * cov_chosen
    # Views:
    #   1. Stellantis returns 8%
    #   2. Fineco returns 6%
    #   3. Amplifon will outperform Danieli by 4%
    #   4. Half Buzzi + half Alerion Clean Power outperform Italmobiliare by 6%
    Q = np.array([0.08, 0.06, 0.04, 0.06]).T
    # Pick (link) matrix mapping each view onto the 12 assets.
    P = np.array([[1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
                  [0, 0, 0, 0, 1, 0, 0, -1, 0, 0, 0, 0],
                  [0, 0, 0.5, 0, 0, 0, 0.5, 0, -1, 0, 0, 0]])
    # Implied returns of the view portfolios (not used further below).
    P_return = np.dot(P, equilibrium_returns.T)
    # View-confidence matrix Omega: diagonal of P * (tau*Sigma) * P'.
    opinion = np.array([1, 1, 1, 1])
    theta = np.zeros((len(Q), len(Q)))
    for x in range(len(Q)):
        weighted = opinion[x] * np.dot(P[x, :], prior_variance)
        theta[x, x] = np.dot(weighted, P[x, :].T)
    # Posterior ("master formula"):
    #   mu_BL = [(tau*Sigma)^-1 + P' Omega^-1 P]^-1 [(tau*Sigma)^-1 pi + P' Omega^-1 Q]
    theta_inv = np.linalg.inv(theta)
    prior_variance_inv = np.linalg.inv(prior_variance)
    a = np.dot(P.T, theta_inv)
    b = np.dot(a, P)
    c = np.linalg.inv(prior_variance_inv + b)
    d = np.dot(prior_variance_inv, equilibrium_returns)
    e = np.dot(a, Q)
    return_BL = np.dot(c, d + e)
    # Posterior covariance is the same inverse computed above.
    variance_BL = c
    return return_BL, variance_BL, equilibrium_returns
def BL_bayes_allocation_opt_sim(returns, cov_matrix, frequency, name, num_portfolios=10000):
    """Monte-Carlo max-Sharpe allocation from Black-Litterman inputs (long-only).

    Parameters
    ----------
    returns : numpy.ndarray
        Per-period expected returns of the assets (row vector or 1-D array).
    cov_matrix : pandas.DataFrame
        Per-period covariance matrix whose columns carry the asset names.
    frequency : int
        Periods per year used to annualise (252 daily, 12 monthly).
    name : str
        Suffix for the weight column label ('Weight' + name).
    num_portfolios : int, optional
        Number of random portfolios to simulate (default 10000, matching
        the original behaviour).

    Returns
    -------
    tuple of pandas.DataFrame
        (weights of the max-Sharpe portfolio, one-row return/volatility frame).
    """
    np.random.seed(100)
    # ROBUSTNESS FIX: the asset count was hard-coded to 12; derive it from
    # the covariance matrix instead.
    n_assets = cov_matrix.shape[0]
    # Hoist loop invariants: annualised expected returns and covariance.
    ann_ret = returns * frequency
    ann_cov = cov_matrix * frequency
    all_weights = np.zeros((num_portfolios, n_assets))
    ret_arr = np.zeros(num_portfolios)
    vol_arr = np.zeros(num_portfolios)
    sharpe_arr = np.zeros(num_portfolios)
    for x in range(num_portfolios):
        # Long-only random weights normalised to sum to 1.
        weights = np.random.random(n_assets)
        weights = weights / np.sum(weights)
        all_weights[x, :] = weights
        ret_arr[x] = np.sum(ann_ret * weights)
        vol_arr[x] = np.sqrt(np.dot(weights.T, np.dot(ann_cov, weights)))
        sharpe_arr[x] = ret_arr[x] / vol_arr[x]  # Sharpe ratio with rf = 0
    reddot = sharpe_arr.argmax()
    df = pd.DataFrame({'Ret_max_sr': round(ret_arr[reddot], 2),
                       'Vol_max_vol': round(vol_arr[reddot], 2)}, index=['0'])
    # ROBUSTNESS FIX: take the asset labels from cov_matrix instead of the
    # module-level Chosen_daily_returns (removes a hidden global dependency;
    # identical labels for all existing callers).
    asset_list = list(cov_matrix.columns)
    portfolio_weights = {asset: round(weight, 2)
                         for asset, weight in zip(asset_list, all_weights[reddot, :])}
    BLweights = pd.DataFrame(portfolio_weights, index=['Weight' + name]).T
    return BLweights, df
BL = Black_Litterman(y = Chosen_daily_returns, delta=delta_daily, risk_free=0.03)
returnsBL = pd.DataFrame(BL[0], index=list(Chosen_daily_returns), columns=["BL Ret"])
varianceBL = pd.DataFrame(BL[1], index=list(Chosen_daily_returns), columns=list(Chosen_daily_returns))
BLweights, df = BL_bayes_allocation_opt_sim(returns= np.array(returnsBL.T), cov_matrix=varianceBL,frequency=252, name ="BL")
BLweights
| WeightBL | |
|---|---|
| Stellantis | 0.17 |
| Fineco | 0.15 |
| Alerion clean power | 0.12 |
| El en | 0.01 |
| Amplifon | 0.16 |
| Interpump group | 0.02 |
| Buzzi | 0.15 |
| Danieli | 0.03 |
| Italmobiliare | 0.02 |
| Sol | 0.09 |
| Terna rete elettrica naz | 0.08 |
| Exprivia | 0.02 |
BL_ret_daily = pd.DataFrame(portfolio(returns=Chosen_daily_returns, weights=BLweights))
BLweights["WeightMV"] = daily_meanvariance_nn[0]
BLweights
| WeightBL | WeightMV | |
|---|---|---|
| Stellantis | 0.17 | 0.04 |
| Fineco | 0.15 | 0.07 |
| Alerion clean power | 0.12 | 0.20 |
| El en | 0.01 | 0.10 |
| Amplifon | 0.16 | 0.20 |
| Interpump group | 0.02 | 0.05 |
| Buzzi | 0.15 | 0.02 |
| Danieli | 0.03 | 0.03 |
| Italmobiliare | 0.02 | 0.16 |
| Sol | 0.09 | 0.11 |
| Terna rete elettrica naz | 0.08 | 0.00 |
| Exprivia | 0.02 | 0.03 |
BLweights.plot.bar(figsize=(10,5))
plt.show()
BLstats = statistics(returns = BL_ret_daily)
BLstats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| 0 | 0.000815 | 0.013161 | 0.000173 | -0.693014 | 9.164812 |
# Black-Litterman on the monthly sample (risk aversion delta_monthly, rf = 3%).
BL_monthly = Black_Litterman(y = Chosen_monthly_returns, delta=delta_monthly, risk_free=0.03)
# FIX: label the monthly outputs with the monthly asset list (was Chosen_daily_returns,
# a copy-paste slip from the daily cell).
returnsBL_monthly = pd.DataFrame(BL_monthly[0], index=list(Chosen_monthly_returns), columns=["BL Ret"])
varianceBL_monthly = pd.DataFrame(BL_monthly[1], index=list(Chosen_monthly_returns), columns=list(Chosen_monthly_returns))
# Simulated max-Sharpe allocation on the monthly BL moments, annualised with 12 periods.
BLweights, df = BL_bayes_allocation_opt_sim(returns= np.array(returnsBL_monthly.T), cov_matrix=varianceBL_monthly,frequency=12, name ="BL")
BLweights
| WeightBL | |
|---|---|
| Stellantis | 0.17 |
| Fineco | 0.15 |
| Alerion clean power | 0.12 |
| El en | 0.01 |
| Amplifon | 0.16 |
| Interpump group | 0.02 |
| Buzzi | 0.15 |
| Danieli | 0.03 |
| Italmobiliare | 0.02 |
| Sol | 0.09 |
| Terna rete elettrica naz | 0.08 |
| Exprivia | 0.02 |
# Monthly return series of the portfolio built with the BL weights.
BL_ret_monthly = pd.DataFrame(portfolio(returns=Chosen_monthly_returns, weights=BLweights))
# Append the (non-negative) monthly mean-variance weights for comparison.
BLweights["WeightMV"] = monthly_meanvariance_nn[0]
BLweights
| WeightBL | WeightMV | |
|---|---|---|
| Stellantis | 0.17 | 0.06 |
| Fineco | 0.15 | 0.01 |
| Alerion clean power | 0.12 | 0.19 |
| El en | 0.01 | 0.03 |
| Amplifon | 0.16 | 0.20 |
| Interpump group | 0.02 | 0.07 |
| Buzzi | 0.15 | 0.03 |
| Danieli | 0.03 | 0.02 |
| Italmobiliare | 0.02 | 0.14 |
| Sol | 0.09 | 0.18 |
| Terna rete elettrica naz | 0.08 | 0.04 |
| Exprivia | 0.02 | 0.04 |
# Descriptive statistics of the monthly BL portfolio return series.
BLstats_monthly = pd.DataFrame(statistics(returns = BL_ret_monthly))
BLstats_monthly
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| 0 | 0.017249 | 0.058504 | 0.003423 | -0.358695 | 0.454747 |
def Bayesian_model(F):
    """Conjugate Bayesian update of the sample moments of `F`.

    The prior mean is the sample mean shifted up by one standard
    deviation per asset; the prior covariance is the sample covariance
    scaled by a pseudo-sample size of 2 (a diffuse prior). The posterior
    combines sample and prior precisions in the standard conjugate way.

    Parameters
    ----------
    F : pd.DataFrame
        Return observations, one column per asset.

    Returns
    -------
    tuple
        (posterior mean vector, posterior covariance matrix), both as
        NumPy arrays.
    """
    n_obs = len(F)
    sample_mean = F.mean()
    sample_cov = F.cov().to_numpy()

    # Prior: one-sigma-above-sample mean, covariance inflated by T_prior = 2.
    prior_mean = sample_mean + np.sqrt(np.diag(sample_cov))
    prior_cov = sample_cov * 2

    # Precision (inverse covariance) matrices of likelihood and prior.
    sample_precision = np.linalg.inv(sample_cov)
    prior_precision = np.linalg.inv(prior_cov)

    # Posterior covariance is the inverse of the combined precision;
    # the posterior mean is the precision-weighted blend of both means.
    posterior_cov = np.linalg.inv(n_obs * sample_precision + prior_precision)
    posterior_mean = posterior_cov @ (n_obs * sample_precision @ sample_mean
                                      + prior_precision @ prior_mean)
    return posterior_mean, posterior_cov
# Posterior moments for the daily sample: [0] = mean, [1] = covariance.
Daily_bayes = Bayesian_model(F=Chosen_daily_returns)
daily_mean_bayes = pd.DataFrame(Daily_bayes[0],index=list(Chosen_daily_returns), columns=["Bayes Ret"])
daily_cov_bayes = pd.DataFrame(Daily_bayes[1], index=list(Chosen_daily_returns), columns=list(Chosen_daily_returns))
# Mean-variance optimisation on the posterior moments (252 trading days/year).
daily_bayes_MV = BL_bayes_allocation_opt_sim(returns= np.array(daily_mean_bayes.T),
cov_matrix=daily_cov_bayes,frequency=252, name ="Bayes")
daily_bayes_weights = daily_bayes_MV[0]
# Daily return series of the Bayesian portfolio.
daily_ret_bayes = pd.DataFrame(portfolio(returns=Chosen_daily_returns, weights=daily_bayes_weights))
# Descriptive statistics of the daily Bayesian portfolio.
daily_bayes_stats = statistics(returns=daily_ret_bayes)
daily_bayes_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| 0 | 0.000837 | 0.011774 | 0.000139 | -0.536769 | 7.248386 |
# Posterior moments for the monthly sample: [0] = mean, [1] = covariance.
Monthly_bayes = Bayesian_model(F=Chosen_monthly_returns)
monthly_mean_bayes = pd.DataFrame(Monthly_bayes[0],index=list(Chosen_monthly_returns), columns=["Bayes Ret"])
monthly_cov_bayes = pd.DataFrame(Monthly_bayes[1], index=list(Chosen_monthly_returns), columns=list(Chosen_monthly_returns))
# Mean-variance optimisation on the posterior moments (12 periods/year).
monthly_bayes_MV = BL_bayes_allocation_opt_sim(returns= np.array(monthly_mean_bayes.T),
cov_matrix=monthly_cov_bayes,frequency=12, name ="Bayes")
# FIX: was `daily_bayes_MV[0]`, which silently reused the DAILY weights and
# discarded the monthly optimisation computed just above.
monthly_bayes_weights = monthly_bayes_MV[0]
# Monthly return series of the Bayesian portfolio.
monthly_ret_bayes = pd.DataFrame(portfolio(returns=Chosen_monthly_returns, weights=monthly_bayes_weights))
# Descriptive statistics of the monthly Bayesian portfolio.
monthly_bayes_stats = statistics(returns=monthly_ret_bayes)
monthly_bayes_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| 0 | 0.01806 | 0.05521 | 0.003048 | 0.038032 | 0.266699 |
# Global-minimum-variance weights taken from the efficient-frontier results ([1]).
daily_weightsGMPV = pd.DataFrame({'Weights':daily_EF[1]}, index=list(Chosen_daily_returns))
# FIX: label the monthly weights with the monthly asset list (was Chosen_daily_returns,
# a copy-paste slip from the daily line).
monthly_weightsGMPV = pd.DataFrame({'Weights':monthly_EF[1]}, index=list(Chosen_monthly_returns))
# Portfolio return series at each frequency.
daily_retGMPV = pd.DataFrame(portfolio(returns=Chosen_daily_returns, weights=daily_weightsGMPV), columns=["GMPV"])
monthly_retGMPV = pd.DataFrame(portfolio(returns=Chosen_monthly_returns, weights=monthly_weightsGMPV), columns=["GMPV"])
# Descriptive statistics of the daily GMV portfolio.
dailyGMPV_stats= statistics(returns=daily_retGMPV)
dailyGMPV_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| GMPV | 0.000608 | 0.01028 | 0.000106 | -0.536998 | 7.664769 |
# Descriptive statistics of the monthly GMV portfolio.
monthlyGMPV_stats= statistics(returns=monthly_retGMPV)
monthlyGMPV_stats
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| GMPV | 0.009218 | 0.03756 | 0.001411 | 0.054335 | 0.275679 |
# Re-attach the constrained MV portfolio and the GMV portfolio to the index
# return tables, then plot cumulative returns at the daily frequency.
daily_index_returns["Portfolio"] = daily_constrained
monthly_index_returns["Portfolio"] = monthly_constrained
daily_index_returns["GMPV"]=daily_retGMPV
monthly_index_returns["GMPV"]=monthly_retGMPV
compare_cum_return(df=daily_index_returns.cumsum())
# Drop leading NaNs before assembling the comparison table.
# FIX: removed the no-op self-assignments `BL_ret_daily = BL_ret_daily` and
# `daily_retGMPV = daily_retGMPV` (dead code with no effect).
daily_constrained = daily_constrained.dropna()
daily_ret_bayes = daily_ret_bayes.dropna()
# One DataFrame with all four daily strategies, aligned on Date.
df = pd.DataFrame(BL_ret_daily)
df.rename(columns=lambda x: "BL", inplace=True)
df["daily_constrained"] = daily_constrained
df["daily_ret_bayes"] = daily_ret_bayes
df["daily_retGMPV"] = daily_retGMPV
df
| BL | daily_constrained | daily_ret_bayes | daily_retGMPV | |
|---|---|---|---|---|
| Date | ||||
| 2015-01-02 | 0.004941 | 0.011386 | 0.011662 | 0.001976 |
| 2015-01-05 | -0.012776 | -0.001402 | -0.003132 | -0.015153 |
| 2015-01-06 | -0.004975 | -0.005323 | -0.004226 | -0.010168 |
| 2015-01-07 | 0.009978 | 0.009875 | 0.007483 | 0.008063 |
| 2015-01-08 | 0.003042 | -0.006514 | -0.000336 | -0.000168 |
| ... | ... | ... | ... | ... |
| 2024-01-10 | 0.001569 | -0.005379 | -0.005423 | -0.012709 |
| 2024-01-11 | -0.004858 | -0.004846 | -0.006346 | -0.006794 |
| 2024-01-12 | 0.009596 | 0.009836 | 0.012920 | 0.017116 |
| 2024-01-15 | -0.008297 | -0.008335 | -0.008380 | -0.000886 |
| 2024-01-16 | -0.006626 | -0.012359 | -0.009598 | -0.004469 |
2358 rows × 4 columns
# Blend the four strategies into one final portfolio with fixed weights
# (they sum to 100%):
# Black-Litterman 20%
# Mean-Variance (constrained) 30%
# GMPV 40%
# Bayesian 10%
df["FINALPORTFOLIO"] = df["BL"]*0.20+df["daily_constrained"]*0.30 + df["daily_retGMPV"]*0.40+ df["daily_ret_bayes"]*0.10
df
| BL | daily_constrained | daily_ret_bayes | daily_retGMPV | FINALPORTFOLIO | |
|---|---|---|---|---|---|
| Date | |||||
| 2015-01-02 | 0.004941 | 0.011386 | 0.011662 | 0.001976 | 0.006361 |
| 2015-01-05 | -0.012776 | -0.001402 | -0.003132 | -0.015153 | -0.009350 |
| 2015-01-06 | -0.004975 | -0.005323 | -0.004226 | -0.010168 | -0.007082 |
| 2015-01-07 | 0.009978 | 0.009875 | 0.007483 | 0.008063 | 0.008931 |
| 2015-01-08 | 0.003042 | -0.006514 | -0.000336 | -0.000168 | -0.001447 |
| ... | ... | ... | ... | ... | ... |
| 2024-01-10 | 0.001569 | -0.005379 | -0.005423 | -0.012709 | -0.006926 |
| 2024-01-11 | -0.004858 | -0.004846 | -0.006346 | -0.006794 | -0.005777 |
| 2024-01-12 | 0.009596 | 0.009836 | 0.012920 | 0.017116 | 0.013008 |
| 2024-01-15 | -0.008297 | -0.008335 | -0.008380 | -0.000886 | -0.005353 |
| 2024-01-16 | -0.006626 | -0.012359 | -0.009598 | -0.004469 | -0.007780 |
2358 rows × 5 columns
# Add the equally-weighted (1/N) benchmark and compare cumulative returns.
df["1/N"] = daily_equally_weighted
compare_cum_return(df=df.cumsum())
# Isolate the blended portfolio as its own DataFrame for the statistics table.
FINALPORTFOLIO = pd.DataFrame(df["FINALPORTFOLIO"], index=df["FINALPORTFOLIO"].index,
columns=["FINALPORTFOLIO"])
statsFINALPORTFOLIO = statistics(returns = FINALPORTFOLIO)
statsFINALPORTFOLIO
| Mean | Standard deviation | Variance | Skewness | Kurtosis | |
|---|---|---|---|---|---|
| FINALPORTFOLIO | 0.000757 | 0.011057 | 0.000122 | -0.690703 | 9.074318 |